1 Effect of UPSTM-Based Decorrelation on Feature Discovery

1.0.1 Loading the libraries

# Load analysis libraries.
library("FRESA.CAD")   # IDeA/ILAA decorrelation, heatMaps, FRESAScale, uniRankVar
library(readxl)
library(igraph)        # feature-association network plots
library(umap)
library(tsne)
library(entropy)       # entropy()/discretize() summaries
library(psych)         # fa() for exploratory factor analysis
library(whitening)
library("vioplot")
library("rpart")       # CART models in section 1.11
library(stringr)       # FIX: str_remove_all()/str_detect() are used below but
                       # stringr was never loaded

op <- par(no.readonly = TRUE)  # snapshot graphics settings so par(op) can restore them
pander::panderOptions('digits', 3)
pander::panderOptions('table.split.table', 400)
pander::panderOptions('keep.trailing.zeros',TRUE)

1.1 Material and Methods

Data Source https://archive.ics.uci.edu/ml/datasets/seeds

M. Charytanowicz, J. Niewczas, P. Kulczycki, P.A. Kowalski, S. Lukasik, S. Zak, ‘A Complete Gradient Clustering Algorithm for Features Analysis of X-ray Images’, in: Information Technologies in Biomedicine, Ewa Pietka, Jacek Kawa (eds.), Springer-Verlag, Berlin-Heidelberg, 2010, pp. 15-24.

1.2 The Data

# Load the UCI seeds dataset (tab-delimited, no header row).
# NOTE(review): hard-coded user-local path — only runs on this machine.
seeds <- read.delim("~/GitHub/LatentBiomarkers/Data/seeds_dataset.txt", header=FALSE)
par(cex=0.5)

# Column names for the seeds table: seven kernel-geometry measurements
# followed by the class label.
featnames <- c(
  "area", "perimeter", "compactness",
  "length_of_kernel", "width_of_kernel",
  "asymmetry_coeff", "length_ker_groove",
  "class"
)
# Attach the feature names, then binarize the 3-class label:
# class == 1 becomes 1, the other two varieties become 0.
colnames(seeds) <- featnames
seeds$class <- 1*(seeds$class == 1)
pander::pander(table(seeds$class))
0 1
140 70

1.2.1 Standardize the names for the reporting

# Parameters consumed by the generic report code below.
studyName <- "Seeds"
dataframe <- seeds       # working copy used throughout the report
outcome <- "class"       # name of the binary outcome column
thro <- 0.80             # correlation threshold for the IDeA decorrelation
TopVariables <- 5        # number of top-ranked features shown in tables
cexheat = 0.45           # label size for the heatmaps

1.3 Generating the report

1.3.1 Libraries

Some libraries

# NOTE(review): these packages were already loaded at the top of the file;
# they are repeated here presumably because this section is a reusable
# report template pasted into multiple documents.
library(psych)
library(whitening)
library("vioplot")
library("rpart")

1.3.2 Data specs

pander::pander(c(rows=nrow(dataframe),col=ncol(dataframe)-1))
rows col
210 7
pander::pander(table(dataframe[,outcome]))
0 1
140 70

# Predictor list: every column except the outcome.
varlist <- colnames(dataframe)
varlist <- varlist[varlist != outcome]

# Very wide datasets (>1500 predictors) skip the expensive heatmaps below.
largeSet <- length(varlist) > 1500 

1.3.3 Scaling the data

Scaling the data, then removing near-zero-variance columns and highly collinear (r > 0.99999) columns.


  ### Some global cleaning
  # Keep only columns whose standard deviation is non-negligible.
  # NOTE(review): the name 'sdiszero' is misleading — TRUE here means the
  # column's sd is NOT (near) zero.
  sdiszero <- apply(dataframe,2,sd) > 1.0e-16
  dataframe <- dataframe[,sdiszero]

  # Drop near-duplicate predictors (|r| > 0.99999); always keep the outcome.
  varlist <- colnames(dataframe)[colnames(dataframe) != outcome]
  tokeep <- c(as.character(correlated_Remove(dataframe,varlist,thr=0.99999)),outcome)
  dataframe <- dataframe[,tokeep]

  varlist <- colnames(dataframe)
  varlist <- varlist[varlist != outcome]
  
  # Flag columns with >= 5 distinct values as continuous. The binary outcome
  # (2 levels) is FALSE here, so !iscontinous captures it downstream.
  iscontinous <- sapply(apply(dataframe,2,unique),length) >= 5 ## Only variables with enough samples


# Rank-based logit scaling of all columns (FRESA.CAD "OrderLogit" method).
dataframeScaled <- FRESAScale(dataframe,method="OrderLogit")$scaledData

1.4 The heatmap of the data

# Cap the number of plotted samples at 1000 to keep the heatmap legible.
numsub <- nrow(dataframe)
if (numsub > 1000) numsub <- 1000

# Row-clustered heatmap of the scaled raw data (skipped for very wide sets).
if (!largeSet)
{

  hm <- heatMaps(data=dataframeScaled[1:numsub,],
                 Outcome=outcome,
                 Scale=TRUE,
                 hCluster = "row",
                 xlab="Feature",
                 ylab="Sample",
                 srtCol=45,
                 srtRow=45,
                 cexCol=cexheat,
                 cexRow=cexheat
                 )
  par(op)  # restore graphics settings changed by heatMaps
}

1.4.0.1 Correlation Matrix of the Data

The heat map of the pairwise correlation matrix of the original features.


# Heatmap of |Pearson r| between the raw predictors, then print the largest
# off-diagonal correlation (shown below the chunk).
if (!largeSet)
{

  par(cex=0.6,cex.main=0.85,cex.axis=0.7)
  #cormat <- Rfast::cora(as.matrix(dataframe[,varlist]),large=TRUE)
  cormat <- cor(dataframe[,varlist],method="pearson")
  cormat[is.na(cormat)] <- 0  # constant columns yield NA; treat as uncorrelated
  gplots::heatmap.2(abs(cormat),
                    trace = "none",
  #                  scale = "row",
                    mar = c(5,5),
                    col=rev(heat.colors(5)),
                    main = "Original Correlation",
                    cexRow = cexheat,
                    cexCol = cexheat,
                     srtCol=45,
                     srtRow=45,
                    key.title=NA,
                    key.xlab="|Pearson Correlation|",
                    xlab="Feature", ylab="Feature")
  diag(cormat) <- 0                 # ignore self-correlation
  print(max(abs(cormat)))           # strongest pairwise association
}

[1] 0.9943409

1.5 The decorrelation


# IDeA decorrelation of the predictors at threshold 'thro' (0.80);
# verbose=TRUE prints the stepwise trace reproduced below.
DEdataframe <- IDeA(dataframe,verbose=TRUE,thr=thro)
#> 
#>  area 
#>             area        perimeter      compactness length_of_kernel 
#>        1.0000000        0.8571429        0.2857143        0.5714286 
#>  width_of_kernel  asymmetry_coeff 
#>        0.7142857        0.1428571 
#> 
#>  Included: 7 , Uni p: 0.02142857 , Base Size: 1 , Rcrit: 0.1398922 
#> 
#> 
 1 <R=0.994,thr=0.950>, Top: 1< 2 >[Fa= 1 ]( 1 , 2 , 0 ),<|><>Tot Used: 3 , Added: 2 , Zero Std: 0 , Max Cor: 0.950
#> 
 2 <R=0.950,thr=0.900>, Top: 1< 1 >[Fa= 1 ]( 1 , 1 , 1 ),<|><>Tot Used: 4 , Added: 1 , Zero Std: 0 , Max Cor: 0.864
#> 
 3 <R=0.864,thr=0.800>, Top: 2< 1 >[Fa= 2 ]( 2 , 3 , 1 ),<|><>Tot Used: 5 , Added: 3 , Zero Std: 0 , Max Cor: 0.712
#> 
 4 <R=0.712,thr=0.800>
#> 
 [ 4 ], 0.5972333 Decor Dimension: 5 Nused: 5 . Cor to Base: 4 , ABase: 7 , Outcome Base: 0 
#> 
# Decorrelated predictor names (latent features carry the "La_" prefix).
varlistc <- colnames(DEdataframe)[colnames(DEdataframe) != outcome]

# Total variance of the raw predictors, for comparison with the decorrelated set.
pander::pander(sum(apply(dataframe[,varlist],2,var)))

13

pander::pander(sum(apply(DEdataframe[,varlistc],2,var)))

10.8

pander::pander(entropy(discretize(unlist(dataframe[,varlist]), 256)))

4.55

pander::pander(entropy(discretize(unlist(DEdataframe[,varlistc]), 256)))

3.96


# Per-feature variance ratio reported by IDeA — presumably the fraction of
# original variance kept by each (latent) feature; verify against FRESA.CAD docs.
varratio <- attr(DEdataframe,"VarRatio")

pander::pander(tail(varratio))
class area La_length_ker_groove La_length_of_kernel La_width_of_kernel La_perimeter
1 1 0.254 0.029 0.0206 0.0113

1.5.1 The decorrelation matrix


# Sparsity pattern of the UPLTM decorrelation matrix: cells are 1 where the
# transform has a non-zero coefficient (input feature contributes to output).
if (!largeSet)
{

  par(cex=0.6,cex.main=0.85,cex.axis=0.7)
  
  UPLTM <- attr(DEdataframe,"UPLTM")
  
  gplots::heatmap.2(1.0*(abs(UPLTM)>0),
                    trace = "none",
                    mar = c(5,5),
                    col=rev(heat.colors(5)),
                    main = "Decorrelation matrix",
                    cexRow = cexheat,
                    cexCol = cexheat,
                   srtCol=45,
                   srtRow=45,
                    key.title=NA,
                    key.xlab="|Beta|>0",
                    xlab="Output Feature", ylab="Input Feature")
  
  par(op)
  
  
  
}

1.5.2 Formulas Network

Displaying the features associations

par(op)


#if ((ncol(dataframe) < 1000) && (ncol(dataframe) > 10))
#{

#  DEdataframeB <- ILAA(dataframe,verbose=TRUE,thr=thro,bootstrap=30)

  # Adjacency skeleton: non-zero UPLTM coefficients, weighted by the observed
  # feature-feature correlation.
  transform <- attr(DEdataframe,"UPLTM") != 0
  tnames <- colnames(transform)
  colnames(transform) <- str_remove_all(colnames(transform),"La_")
  transform <- abs(transform*cor(dataframe[,rownames(transform)])) # The weights are proportional to the observed correlation
  
  
  # Vertex sizes scale with the fscore attribute, min-max normalized to [0,10].
  VertexSize <- attr(DEdataframe,"fscore") # The size depends on the variable independence relevance (fscore)
  names(VertexSize) <- str_remove_all(names(VertexSize),"La_")
  VertexSize <- 10*(VertexSize-min(VertexSize))/(max(VertexSize)-min(VertexSize)) # Normalization

  # Rank features by row/column degree plus small tie-breakers from vertex
  # size and variance ratio.
  VertexSize <- VertexSize[rownames(transform)]
  rsum <- apply(1*(transform !=0),1,sum) + 0.01*VertexSize + 0.001*varratio[tnames]
  csum <- apply(1*(transform !=0),2,sum) + 0.01*VertexSize + 0.001*varratio[tnames]
  
  ntop <- min(10,length(rsum))


  # Keep the top-degree features, plus any column connected to more than one
  # of them; reduce the transform to that subset.
  topfeatures <- unique(c(names(rsum[order(-rsum)])[1:ntop],names(csum[order(-csum)])[1:ntop]))
  rtrans <- transform[topfeatures,]
  csum <- (apply(1*(rtrans !=0),2,sum) > 1)
  rtrans <- rtrans[,csum]
  topfeatures <- unique(c(topfeatures,colnames(rtrans)))
  print(ncol(transform))
#> [1] 5
  transform <- transform[topfeatures,topfeatures]
  print(ncol(transform))
#> [1] 5
  # If still too large, keep only features with degree > 1 in both directions.
  if (ncol(transform)>100)
  {
    csum <- (apply(1*(transform !=0),2,sum) > 1) & (apply(1*(transform !=0),1,sum) > 1)
    transform <- transform[csum,csum]
    print(ncol(transform))
  }

    # Plot the reduced matrix and the directed association graph, with
    # optimal-modularity community coloring.
    if (ncol(transform) < 150)
    {

      gplots::heatmap.2(transform,
                        trace = "none",
                        mar = c(5,5),
                        col=rev(heat.colors(5)),
                        main = "Red Decorrelation matrix",
                        cexRow = cexheat,
                        cexCol = cexheat,
                       srtCol=45,
                       srtRow=45,
                        key.title=NA,
                        key.xlab="|Beta|>0",
                        xlab="Output Feature", ylab="Input Feature")
  
      par(op)
      VertexSize <- VertexSize[colnames(transform)]
      gr <- graph_from_adjacency_matrix(transform,mode = "directed",diag = FALSE,weighted=TRUE)
      gr$layout <- layout_with_fr
      
      fc <- cluster_optimal(gr)
      plot(fc, gr,
           edge.width = 2*E(gr)$weight,
           vertex.size=VertexSize,
           edge.arrow.size=0.5,
           edge.arrow.width=0.5,
           vertex.label.cex=(0.15+0.05*VertexSize),
           vertex.label.dist=0.5 + 0.05*VertexSize,
           main="Top Feature Association")
      
    }


par(op)

1.6 The heatmap of the decorrelated data

# Row-clustered heatmap of the decorrelated data, mirroring section 1.4.
if (!largeSet)
{

  hm <- heatMaps(data=DEdataframe[1:numsub,],
                 Outcome=outcome,
                 Scale=TRUE,
                 hCluster = "row",
                 cexRow = cexheat,
                 cexCol = cexheat,
                 srtCol=45,
                 srtRow=45,
                 xlab="Feature",
                 ylab="Sample")
  par(op)
}

1.7 The correlation matrix after decorrelation

# Correlation heatmap after decorrelation; the printed maximum off-diagonal
# |r| (below) should not exceed the decorrelation threshold 'thro'.
if (!largeSet)
{

  cormat <- cor(DEdataframe[,varlistc],method="pearson")
  cormat[is.na(cormat)] <- 0
  
  gplots::heatmap.2(abs(cormat),
                    trace = "none",
                    mar = c(5,5),
                    col=rev(heat.colors(5)),
                    main = "Correlation after ILAA",
                    cexRow = cexheat,
                    cexCol = cexheat,
                     srtCol=45,
                     srtRow=45,
                    key.title=NA,
                    key.xlab="|Pearson Correlation|",
                    xlab="Feature", ylab="Feature")
  
  par(op)
  diag(cormat) <- 0
  print(max(abs(cormat)))
}

[1] 0.7116456

1.8 U-MAP Visualization of features

1.8.1 The UMAP on Raw Data


  # One color per class; top features = union of the univariate ensemble
  # ranking and the LASSO-selected features on the raw data.
  classes <- unique(dataframe[1:numsub,outcome])
  raincolors <- rainbow(length(classes))
  names(raincolors) <- classes
  topvars <- univariate_BinEnsemble(dataframe,outcome)
  lso <- LASSO_MIN(formula(paste(outcome,"~.")),dataframe,family="binomial")
  topvars <- unique(c(names(topvars),lso$selectedfeatures))
  pander::pander(head(topvars))

asymmetry_coeff, length_ker_groove, compactness, length_of_kernel, perimeter and width_of_kernel

#  names(topvars)
#if (nrow(dataframe) < 1000)
#{
  # 2-D UMAP embedding of the scaled top raw features, labeled/colored by class.
  datasetframe.umap = umap(scale(dataframe[1:numsub,topvars]),n_components=2)
#  datasetframe.umap = umap(dataframe[1:numsub,varlist],n_components=2)
  plot(datasetframe.umap$layout,xlab="U1",ylab="U2",main="UMAP: Original",t='n')
  text(datasetframe.umap$layout,labels=dataframe[1:numsub,outcome],col=raincolors[dataframe[1:numsub,outcome]+1])

#}

1.8.2 The decorrelated UMAP

  # Keep decorrelated features retaining at least 2.5% of variance, then
  # re-select top features (univariate ensemble + LASSO) on the decorrelated set.
  varlistcV <- names(varratio[varratio >= 0.025])
  topvars <- univariate_BinEnsemble(DEdataframe[,varlistcV],outcome)
  lso <- LASSO_MIN(formula(paste(outcome,"~.")),DEdataframe,family="binomial")
  topvars <- unique(c(names(topvars),lso$selectedfeatures))
  pander::pander(head(topvars))

La_length_ker_groove, asymmetry_coeff, compactness, La_length_of_kernel, area and La_perimeter


  varlistcV <- varlistcV[varlistcV != outcome]
  
#  DEdataframe[,outcome] <- as.numeric(DEdataframe[,outcome])
#if (nrow(dataframe) < 1000)
#{
  datasetframe.umap = umap(scale(DEdataframe[1:numsub,topvars]),n_components=2)
#  datasetframe.umap = umap(DEdataframe[1:numsub,varlistcV],n_components=2)
  plot(datasetframe.umap$layout,xlab="U1",ylab="U2",main="UMAP: After ILAA",t='n')
  text(datasetframe.umap$layout,labels=DEdataframe[1:numsub,outcome],col=raincolors[DEdataframe[1:numsub,outcome]+1])

#}

1.9 Univariate Analysis

1.9.1 Univariate



# Univariate AUC ranking of the raw features against the intercept-only model.
univarRAW <- uniRankVar(varlist,
               paste(outcome,"~1"),
               outcome,
               dataframe,
               rankingTest="AUC")



# Univariate AUC ranking of the decorrelated features.
# FIX: removed the stray trailing comma, which passed an extra empty
# positional argument to uniRankVar.
univarDe <- uniRankVar(varlistc,
               paste(outcome,"~1"),
               outcome,
               DEdataframe,
               rankingTest="AUC")

1.9.2 Final Table


# Columns reported in the final univariate tables.
univariate_columns <- c("caseMean","caseStd","controlMean","controlStd","controlKSP","ROCAUC")

##top variables
# Logical mask selecting the first TopVariables rows of the ranked frame.
# (seq_along replaces the c(1:length(...)) anti-pattern; identical result here.)
topvar <- seq_along(varlist) <= TopVariables
tableRaw <- univarRAW$orderframe[topvar,univariate_columns]
pander::pander(tableRaw)
  caseMean caseStd controlMean controlStd controlKSP ROCAUC
asymmetry_coeff 2.67 1.1739 4.217 1.3818 9.80e-01 0.810
length_ker_groove 5.09 0.2637 5.569 0.5009 1.98e-03 0.764
compactness 0.88 0.0162 0.866 0.0254 5.69e-01 0.653
length_of_kernel 5.51 0.2315 5.689 0.5075 3.68e-04 0.562
perimeter 14.29 0.5766 14.692 1.5318 2.01e-05 0.524


# Top decorrelated rows: the best TopVariables features plus the first
# TopVariables/2 latent ("La_") features, indexed by row name.
topLAvar <- univarDe$orderframe$Name[str_detect(univarDe$orderframe$Name,"La_")]
topLAvar <- unique(c(univarDe$orderframe$Name[topvar],topLAvar[1:as.integer(TopVariables/2)]))
finalTable <- univarDe$orderframe[topLAvar,univariate_columns]


pander::pander(finalTable)
  caseMean caseStd controlMean controlStd controlKSP ROCAUC
La_length_ker_groove 3.00 0.1832 3.365 0.1729 0.849 0.927
asymmetry_coeff 2.67 1.1739 4.217 1.3818 0.980 0.810
La_width_of_kernel 5.58 0.0483 5.529 0.0499 0.828 0.753
compactness 0.88 0.0162 0.866 0.0254 0.569 0.653
La_length_of_kernel -3.17 0.0582 -3.144 0.0817 0.601 0.610

# Coefficient vectors of each latent feature, and the per-feature fscore.
dc <- getLatentCoefficients(DEdataframe)
fscores <- attr(DEdataframe,"fscore")


# Mean formula length, number of latent features, and the fraction of
# predictors (excluding the outcome) that became latent.
pander::pander(c(mean=mean(sapply(dc,length)),total=length(dc),fraction=length(dc)/(ncol(dataframe)-1)))
mean total fraction
2.5 4 0.571

# Human-readable decorrelation formulas, named by latent feature.
theCharformulas <- attr(dc,"LatentCharFormulas")


# Append the top raw features not already represented (directly or as a
# "La_" latent version) in the decorrelated table.
# FIX: the original indexed with topvar[!(topvar %in% topLAvar)] — comparing a
# logical vector against character names is always FALSE, so every top raw row
# was appended and duplicates acquired a "1" suffix (e.g. "asymmetry_coeff1").
finalTable <- rbind(finalTable,
                    tableRaw[!(rownames(tableRaw) %in% str_remove_all(topLAvar,"La_")),
                             univariate_columns])


# Annotate each row with its raw-feature AUC (strip "La_" to recover the raw
# name), its decorrelation formula, and its fscore; sort by decorrelated AUC.
orgnamez <- rownames(finalTable)
orgnamez <- str_remove_all(orgnamez,"La_")
finalTable$RAWAUC <- univarRAW$orderframe[orgnamez,"ROCAUC"]
finalTable$DecorFormula <- theCharformulas[rownames(finalTable)]
finalTable$fscores <- fscores[rownames(finalTable)]

Final_Columns <- c("DecorFormula","caseMean","caseStd","controlMean","controlStd","controlKSP","ROCAUC","RAWAUC","fscores")

finalTable <- finalTable[order(-finalTable$ROCAUC),]
pander::pander(finalTable[,Final_Columns])
  DecorFormula caseMean caseStd controlMean controlStd controlKSP ROCAUC RAWAUC fscores
La_length_ker_groove - (0.146)area + length_ker_groove 3.00 0.1832 3.365 0.1729 8.49e-01 0.927 0.764 -1
asymmetry_coeff NA 2.67 1.1739 4.217 1.3818 9.80e-01 0.810 0.810 0
asymmetry_coeff1 NA 2.67 1.1739 4.217 1.3818 9.80e-01 0.810 NA NA
length_ker_groove NA 5.09 0.2637 5.569 0.5009 1.98e-03 0.764 0.764 NA
La_width_of_kernel - (0.360)area + (0.524)perimeter + width_of_kernel 5.58 0.0483 5.529 0.0499 8.28e-01 0.753 0.501 -2
compactness NA 0.88 0.0162 0.866 0.0254 5.69e-01 0.653 0.653 0
compactness1 NA 0.88 0.0162 0.866 0.0254 5.69e-01 0.653 NA NA
La_length_of_kernel + (0.228)area - (0.836)perimeter + length_of_kernel -3.17 0.0582 -3.144 0.0817 6.01e-01 0.610 0.562 -2
length_of_kernel NA 5.51 0.2315 5.689 0.5075 3.68e-04 0.562 0.562 NA
perimeter NA 14.29 0.5766 14.692 1.5318 2.01e-05 0.524 0.524 NA

1.10 Comparing ILAA vs PCA vs EFA

1.10.1 PCA

# PCA on the continuous predictors (centered and scaled); non-continuous
# columns (including the binary outcome) are carried over unchanged.
featuresnames <- colnames(dataframe)[colnames(dataframe) != outcome]
pc <- prcomp(dataframe[,iscontinous],center = TRUE,scale. = TRUE)   #principal components
predPCA <- predict(pc,dataframe[,iscontinous])
PCAdataframe <- as.data.frame(cbind(predPCA,dataframe[,!iscontinous]))
colnames(PCAdataframe) <- c(colnames(predPCA),colnames(dataframe)[!iscontinous]) 
#plot(PCAdataframe[,colnames(PCAdataframe)!=outcome],col=dataframe[,outcome],cex=0.65,cex.lab=0.5,cex.axis=0.75,cex.sub=0.5,cex.main=0.75)

#pander::pander(pc$rotation)


# Correlation among the PCA-transformed features (diagonal aside, should be ~0).
PCACor <- cor(PCAdataframe[,colnames(PCAdataframe) != outcome])


  gplots::heatmap.2(abs(PCACor),
                    trace = "none",
  #                  scale = "row",
                    mar = c(5,5),
                    col=rev(heat.colors(5)),
                    main = "PCA Correlation",
                    cexRow = 0.5,
                    cexCol = 0.5,
                     srtCol=45,
                     srtRow= -45,
                    key.title=NA,
                    key.xlab="Pearson Correlation",
                    xlab="Feature", ylab="Feature")

1.10.2 EFA


# Exploratory factor analysis (varimax) on the scaled continuous predictors;
# falls back to the scaled data unchanged for very wide sets.
EFAdataframe <- dataframeScaled

if (length(iscontinous) < 2000)   # length(iscontinous) == ncol(dataframe)
{
  # Number of factors: bounded by columns, rows, and half the PCA dimension.
  topred <- min(length(iscontinous),nrow(dataframeScaled),ncol(predPCA)/2)
  if (topred < 2) topred <- 2
  
  uls <- fa(dataframeScaled[,iscontinous],nfactors=topred,rotate="varimax",warnings=FALSE)  # EFA analysis
  predEFA <- predict(uls,dataframeScaled[,iscontinous])
  EFAdataframe <- as.data.frame(cbind(predEFA,dataframeScaled[,!iscontinous]))
  colnames(EFAdataframe) <- c(colnames(predEFA),colnames(dataframeScaled)[!iscontinous]) 


  
  # Residual correlation among the factor scores.
  EFACor <- cor(EFAdataframe[,colnames(EFAdataframe) != outcome])
  
  
    gplots::heatmap.2(abs(EFACor),
                      trace = "none",
    #                  scale = "row",
                      mar = c(5,5),
                      col=rev(heat.colors(5)),
                      main = "EFA Correlation",
                      cexRow = 0.5,
                      cexCol = 0.5,
                       srtCol=45,
                       srtRow= -45,
                      key.title=NA,
                      key.xlab="Pearson Correlation",
                      xlab="Feature", ylab="Feature")
}

1.11 Effect on CAR modeling

# Depth-3 CART on the raw features; in-sample predictions feed epi.tests
# (sensitivity/specificity/diagnostic odds ratio tables shown below).
par(op)
par(xpd = TRUE)
dataframe[,outcome] <- factor(dataframe[,outcome])
rawmodel <- rpart(paste(outcome,"~."),dataframe,control=rpart.control(maxdepth=3))
pr <- predict(rawmodel,dataframe,type = "class")

  # Placeholder in case the tree degenerates to a single predicted class.
  ptab <- list(er="Error",detail=matrix(nrow=6,ncol=1))
  if (length(unique(pr))>1)
  {
    plot(rawmodel,main="Raw",branch=0.5,uniform = TRUE,compress = TRUE,margin=0.1)
    text(rawmodel, use.n = TRUE,cex=0.75)
    ptab <- epiR::epi.tests(table(pr==0,dataframe[,outcome]==0))
  }


pander::pander(table(dataframe[,outcome],pr))
  0 1
0 134 6
1 21 49
pander::pander(ptab$detail[c(5,3,4,6),])
  statistic est lower upper
5 diag.ac 0.871 0.818 0.914
3 se 0.700 0.579 0.804
4 sp 0.957 0.909 0.984
6 diag.or 52.111 19.864 136.710

# Depth-3 CART on the decorrelated features retained in varlistcV.
par(op)
par(xpd = TRUE)
DEdataframe[,outcome] <- factor(DEdataframe[,outcome])
IDeAmodel <- rpart(paste(outcome,"~."),DEdataframe[,c(outcome,varlistcV)],control=rpart.control(maxdepth=3))
pr <- predict(IDeAmodel,DEdataframe,type = "class")

  # Placeholder in case the tree degenerates to a single predicted class.
  ptab <- list(er="Error",detail=matrix(nrow=6,ncol=1))
  if (length(unique(pr))>1)
  {
    plot(IDeAmodel,main="ILAA",branch=0.5,uniform = TRUE,compress = TRUE,margin=0.1)
    text(IDeAmodel, use.n = TRUE,cex=0.75)
    ptab <- epiR::epi.tests(table(pr==0,DEdataframe[,outcome]==0))
  }

pander::pander(table(DEdataframe[,outcome],pr))
  0 1
0 130 10
1 7 63
pander::pander(ptab$detail[c(5,3,4,6),])
  statistic est lower upper
5 diag.ac 0.919 0.874 0.952
3 se 0.900 0.805 0.959
4 sp 0.929 0.873 0.965
6 diag.or 117.000 42.543 321.768

# Depth-3 CART on the PCA-transformed features.
par(op)
par(xpd = TRUE)
PCAdataframe[,outcome] <- factor(PCAdataframe[,outcome])
PCAmodel <- rpart(paste(outcome,"~."),PCAdataframe,control=rpart.control(maxdepth=3))
pr <- predict(PCAmodel,PCAdataframe,type = "class")
# Placeholder in case the tree degenerates to a single predicted class.
ptab <- list(er="Error",detail=matrix(nrow=6,ncol=1))
if (length(unique(pr))>1)
{
  plot(PCAmodel,main="PCA",branch=0.5,uniform = TRUE,compress = TRUE,margin=0.1)
  text(PCAmodel, use.n = TRUE,cex=0.75)
  ptab <- epiR::epi.tests(table(pr==0,PCAdataframe[,outcome]==0))
}

pander::pander(table(PCAdataframe[,outcome],pr))
  0 1
0 131 9
1 11 59
pander::pander(ptab$detail[c(5,3,4,6),])
  statistic est lower upper
5 diag.ac 0.905 0.857 0.941
3 se 0.843 0.736 0.919
4 sp 0.936 0.881 0.970
6 diag.or 78.071 30.711 198.465


par(op)

1.11.1 EFA


  # Depth-3 CART on the EFA factor scores.
  EFAdataframe[,outcome] <- factor(EFAdataframe[,outcome])
  EFAmodel <- rpart(paste(outcome,"~."),EFAdataframe,control=rpart.control(maxdepth=3))
  pr <- predict(EFAmodel,EFAdataframe,type = "class")
  
  # Placeholder in case the tree degenerates to a single predicted class.
  ptab <- list(er="Error",detail=matrix(nrow=6,ncol=1))
  if (length(unique(pr))>1)
  {
    plot(EFAmodel,main="EFA",branch=0.5,uniform = TRUE,compress = TRUE,margin=0.1)
    text(EFAmodel, use.n = TRUE,cex=0.75)
    ptab <- epiR::epi.tests(table(pr==0,EFAdataframe[,outcome]==0))
  }


  pander::pander(table(EFAdataframe[,outcome],pr))
  0 1
0 133 7
1 12 58
  pander::pander(ptab$detail[c(5,3,4,6),])
  statistic est lower upper
5 diag.ac 0.910 0.862 0.945
3 se 0.829 0.720 0.908
4 sp 0.950 0.900 0.980
6 diag.or 91.833 34.403 245.137
  par(op)